gl renderer: Declare color matrix child separately
author: Timm Bäder <mail@baedert.org>
Sun, 31 Mar 2019 05:50:33 +0000 (07:50 +0200)
committer: Timm Bäder <mail@baedert.org>
Sat, 13 Apr 2019 04:00:02 +0000 (06:00 +0200)
Otherwise I'm doing this all the time when debugging.

gsk/gl/gskglrenderer.c

index 3690924597b65716f390559a0e35613fa2f0f62e..7d00ecb2900e12ce07becb8f2e7ab2cc7e0c8c9c 100644 (file)
@@ -1215,13 +1215,14 @@ render_color_matrix_node (GskGLRenderer       *self,
   const float min_y = builder->dy + node->bounds.origin.y;
   const float max_x = min_x + node->bounds.size.width;
   const float max_y = min_y + node->bounds.size.height;
+  GskRenderNode *child = gsk_color_matrix_node_get_child (node);
   int texture_id;
   gboolean is_offscreen;
 
   /* Pass min_x/max_x/min_y/max_y without builder->dx/dy! */
   add_offscreen_ops (self, builder,
                      &node->bounds,
-                     gsk_color_matrix_node_get_child (node),
+                     child,
                      &texture_id, &is_offscreen,
                      RESET_CLIP | RESET_OPACITY);